# Install nbconvert and render this notebook to HTML.
# NOTE(review): this cell runs before /content/LogisticRegression_prac_01.ipynb
# exists, so the %shell conversion fails unless the file was uploaded beforehand.
!pip install nbconvert
%shell jupyter nbconvert --to html /content/LogisticRegression_prac_01.ipynb
Titanic - Train dataset¶
# Core analysis stack: numpy/pandas for data wrangling, matplotlib/seaborn for plots.
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
%matplotlib inline
# scikit-learn: train/test splitting, the classifier, and evaluation metrics.
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import confusion_matrix, classification_report
# Load the Titanic training set and preview the first two rows.
df = pd.read_csv('/content/titanic_train.csv')
df.head(2)
| PassengerId | Survived | Pclass | Name | Sex | Age | SibSp | Parch | Ticket | Fare | Cabin | Embarked | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 1 | 0 | 3 | Braund, Mr. Owen Harris | male | 22.0 | 1 | 0 | A/5 21171 | 7.2500 | NaN | S |
| 1 | 2 | 1 | 1 | Cumings, Mrs. John Bradley (Florence Briggs Th... | female | 38.0 | 1 | 0 | PC 17599 | 71.2833 | C85 | C |
Exploratory Analysis on basic import of dataset¶
# Boolean mask of missing values, cell by cell (True = missing).
df.isnull()
| PassengerId | Survived | Pclass | Name | Sex | Age | SibSp | Parch | Ticket | Fare | Cabin | Embarked | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | False | False | False | False | False | False | False | False | False | False | True | False |
| 1 | False | False | False | False | False | False | False | False | False | False | False | False |
| 2 | False | False | False | False | False | False | False | False | False | False | True | False |
| 3 | False | False | False | False | False | False | False | False | False | False | False | False |
| 4 | False | False | False | False | False | False | False | False | False | False | True | False |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 886 | False | False | False | False | False | False | False | False | False | False | True | False |
| 887 | False | False | False | False | False | False | False | False | False | False | False | False |
| 888 | False | False | False | False | False | True | False | False | False | False | True | False |
| 889 | False | False | False | False | False | False | False | False | False | False | False | False |
| 890 | False | False | False | False | False | False | False | False | False | False | True | False |
891 rows × 12 columns
# Visualize where the NaNs are: Age and Cabin show up as the sparse columns.
sns.heatmap(df.isnull(), yticklabels= False, cbar = False, cmap = "Blues")
<matplotlib.axes._subplots.AxesSubplot at 0x7f9335ad1c10>
# Class balance of the target variable.
sns.countplot(x='Survived', data = df)
<matplotlib.axes._subplots.AxesSubplot at 0x7f933c3f0750>
# Survival counts split by passenger sex.
sns.countplot(x = 'Survived', hue = "Sex", data = df)
<matplotlib.axes._subplots.AxesSubplot at 0x7f933c2d55d0>
# Survival counts split by ticket class.
sns.countplot(x='Survived', hue = "Pclass", data = df)
<matplotlib.axes._subplots.AxesSubplot at 0x7f933c032c90>
# PassengerId is a row identifier with no predictive value — drop it,
# then look at pairwise relationships of the remaining numeric columns.
df.drop('PassengerId', inplace = True, axis = 1)
sns.pairplot(df)
<seaborn.axisgrid.PairGrid at 0x7f933998e890>
# Confirm PassengerId is gone.
df.head(2)
| Survived | Pclass | Name | Sex | Age | SibSp | Parch | Ticket | Fare | Cabin | Embarked | |
|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0 | 3 | Braund, Mr. Owen Harris | male | 22.0 | 1 | 0 | A/5 21171 | 7.2500 | NaN | S |
| 1 | 1 | 1 | Cumings, Mrs. John Bradley (Florence Briggs Th... | female | 38.0 | 1 | 0 | PC 17599 | 71.2833 | C85 | C |
# Age distribution per class — the per-class medians here motivate the
# imputation values (37 / 32 / 24) used below.
sns.boxplot(x = 'Pclass', y = 'Age', data = df)
<matplotlib.axes._subplots.AxesSubplot at 0x7f933620eb50>
def impute_age(cols):
    """Impute a missing Age from the passenger's Pclass.

    Expects a two-element Series with Age first and Pclass second, as
    produced by ``df[['Age', 'Pclass']].apply(impute_age, axis=1)``.
    Returns the original Age when present, otherwise a per-class estimate
    taken from the Pclass/Age boxplot above (1st: 37, 2nd: 32, 3rd: 24).
    """
    # Positional access via cols[0] on a labeled Series is deprecated
    # (removed in pandas 3.0) — use .iloc for positional indexing.
    Age = cols.iloc[0]
    Pclass = cols.iloc[1]
    if pd.isnull(Age):
        if Pclass == 1:
            return 37
        elif Pclass == 2:
            return 32
        else:
            return 24
    return Age
# Fill missing Age values row by row using the class-based imputer above.
df['Age'] = df[['Age', 'Pclass']].apply(impute_age, axis = 1)
# Re-check missingness: Age should now be complete; Cabin is still sparse.
sns.heatmap(df.isnull(), cmap = 'Blues')
<matplotlib.axes._subplots.AxesSubplot at 0x7f9335d6c950>
# Preview after Age imputation.
df.head(2)
| Survived | Pclass | Name | Sex | Age | SibSp | Parch | Ticket | Fare | Cabin | Embarked | |
|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0 | 3 | Braund, Mr. Owen Harris | male | 22.0 | 1 | 0 | A/5 21171 | 7.2500 | NaN | S |
| 1 | 1 | 1 | Cumings, Mrs. John Bradley (Florence Briggs Th... | female | 38.0 | 1 | 0 | PC 17599 | 71.2833 | C85 | C |
# Name and Ticket are high-cardinality text fields not used by this model.
df.drop(['Name', 'Ticket'], inplace = True, axis = 1)
df.head(2)
| Survived | Pclass | Sex | Age | SibSp | Parch | Fare | Cabin | Embarked | |
|---|---|---|---|---|---|---|---|---|---|
| 0 | 0 | 3 | male | 22.0 | 1 | 0 | 7.2500 | NaN | S |
| 1 | 1 | 1 | female | 38.0 | 1 | 0 | 71.2833 | C85 | C |
# One-hot encode the categoricals; drop_first avoids the dummy-variable trap
# (female and Embarked=C become the implicit baselines).
sex_df = pd.get_dummies(df['Sex'], drop_first=True)
Embark_df = pd.get_dummies(df['Embarked'], drop_first = True)
sex_df
| male | |
|---|---|
| 0 | 1 |
| 1 | 0 |
| 2 | 0 |
| 3 | 0 |
| 4 | 1 |
| ... | ... |
| 886 | 1 |
| 887 | 0 |
| 888 | 0 |
| 889 | 1 |
| 890 | 1 |
891 rows × 1 columns
# Inspect the encoded Embarked columns (Q, S).
Embark_df
| Q | S | |
|---|---|---|
| 0 | 0 | 1 |
| 1 | 0 | 0 |
| 2 | 0 | 1 |
| 3 | 0 | 1 |
| 4 | 0 | 1 |
| ... | ... | ... |
| 886 | 0 | 1 |
| 887 | 0 | 1 |
| 888 | 0 | 1 |
| 889 | 0 | 0 |
| 890 | 1 | 0 |
891 rows × 2 columns
# Append the encoded dummy columns, then drop Cabin — it is mostly missing
# (see the null heatmap above) and cannot be imputed meaningfully.
df = pd.concat([df, sex_df, Embark_df], axis = 1)
df.drop('Cabin', inplace = True, axis = 1)
# max_iter raised from the default 100: fitting on this unscaled data made
# lbfgs hit its iteration limit (ConvergenceWarning in the original run).
logreg = LogisticRegression(max_iter=1000)
df
| Survived | Pclass | Sex | Age | SibSp | Parch | Fare | Embarked | male | Q | S | |
|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0 | 3 | male | 22.0 | 1 | 0 | 7.2500 | S | 1 | 0 | 1 |
| 1 | 1 | 1 | female | 38.0 | 1 | 0 | 71.2833 | C | 0 | 0 | 0 |
| 2 | 1 | 3 | female | 26.0 | 0 | 0 | 7.9250 | S | 0 | 0 | 1 |
| 3 | 1 | 1 | female | 35.0 | 1 | 0 | 53.1000 | S | 0 | 0 | 1 |
| 4 | 0 | 3 | male | 35.0 | 0 | 0 | 8.0500 | S | 1 | 0 | 1 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 886 | 0 | 2 | male | 27.0 | 0 | 0 | 13.0000 | S | 1 | 0 | 1 |
| 887 | 1 | 1 | female | 19.0 | 0 | 0 | 30.0000 | S | 0 | 0 | 1 |
| 888 | 0 | 3 | female | 24.0 | 1 | 2 | 23.4500 | S | 0 | 0 | 1 |
| 889 | 1 | 1 | male | 26.0 | 0 | 0 | 30.0000 | C | 1 | 0 | 0 |
| 890 | 0 | 3 | male | 32.0 | 0 | 0 | 7.7500 | Q | 1 | 1 | 0 |
891 rows × 11 columns
# The raw categorical columns are now redundant: 'Sex' and 'Embarked' were
# replaced by their dummy-encoded counterparts ('male', 'Q', 'S').
for redundant_col in ('Sex', 'Embarked'):
    df.drop(redundant_col, axis=1, inplace=True)
df
| Survived | Pclass | Age | SibSp | Parch | Fare | male | Q | S | |
|---|---|---|---|---|---|---|---|---|---|
| 0 | 0 | 3 | 22.0 | 1 | 0 | 7.2500 | 1 | 0 | 1 |
| 1 | 1 | 1 | 38.0 | 1 | 0 | 71.2833 | 0 | 0 | 0 |
| 2 | 1 | 3 | 26.0 | 0 | 0 | 7.9250 | 0 | 0 | 1 |
| 3 | 1 | 1 | 35.0 | 1 | 0 | 53.1000 | 0 | 0 | 1 |
| 4 | 0 | 3 | 35.0 | 0 | 0 | 8.0500 | 1 | 0 | 1 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 886 | 0 | 2 | 27.0 | 0 | 0 | 13.0000 | 1 | 0 | 1 |
| 887 | 1 | 1 | 19.0 | 0 | 0 | 30.0000 | 0 | 0 | 1 |
| 888 | 0 | 3 | 24.0 | 1 | 2 | 23.4500 | 0 | 0 | 1 |
| 889 | 1 | 1 | 26.0 | 0 | 0 | 30.0000 | 1 | 0 | 0 |
| 890 | 0 | 3 | 32.0 | 0 | 0 | 7.7500 | 1 | 1 | 0 |
891 rows × 9 columns
# NOTE(review): train_test_split was already imported at the top of the file;
# this re-import is harmless but redundant.
from sklearn.model_selection import train_test_split
# Features = everything except the target; 70/30 split with a fixed seed
# for reproducibility.
X = df.drop('Survived', axis = 1)
y = df['Survived']
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=101)
# Fit the logistic regression (the original run warned that lbfgs hit its
# iteration limit on this unscaled data — see the warning below).
logreg.fit(X_train, y_train)
/usr/local/lib/python3.7/dist-packages/sklearn/linear_model/_logistic.py:818: ConvergenceWarning: lbfgs failed to converge (status=1):
STOP: TOTAL NO. of ITERATIONS REACHED LIMIT.
Increase the number of iterations (max_iter) or scale the data as shown in:
https://scikit-learn.org/stable/modules/preprocessing.html
Please also refer to the documentation for alternative solver options:
https://scikit-learn.org/stable/modules/linear_model.html#logistic-regression
extra_warning_msg=_LOGISTIC_SOLVER_CONVERGENCE_MSG,
LogisticRegression()
# Predict survival (0/1) on the held-out 30%.
predictions = logreg.predict(X_test)
predictions
array([0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0,
1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0,
0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0,
1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1,
0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0,
0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1,
1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0,
0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0,
1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1,
0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1,
0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0,
1, 0, 0, 1])
# Display the full prepared dataset.
df
| Survived | Pclass | Age | SibSp | Parch | Fare | male | Q | S | |
|---|---|---|---|---|---|---|---|---|---|
| 0 | 0 | 3 | 22.0 | 1 | 0 | 7.2500 | 1 | 0 | 1 |
| 1 | 1 | 1 | 38.0 | 1 | 0 | 71.2833 | 0 | 0 | 0 |
| 2 | 1 | 3 | 26.0 | 0 | 0 | 7.9250 | 0 | 0 | 1 |
| 3 | 1 | 1 | 35.0 | 1 | 0 | 53.1000 | 0 | 0 | 1 |
| 4 | 0 | 3 | 35.0 | 0 | 0 | 8.0500 | 1 | 0 | 1 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 886 | 0 | 2 | 27.0 | 0 | 0 | 13.0000 | 1 | 0 | 1 |
| 887 | 1 | 1 | 19.0 | 0 | 0 | 30.0000 | 0 | 0 | 1 |
| 888 | 0 | 3 | 24.0 | 1 | 2 | 23.4500 | 0 | 0 | 1 |
| 889 | 1 | 1 | 26.0 | 0 | 0 | 30.0000 | 1 | 0 | 0 |
| 890 | 0 | 3 | 32.0 | 0 | 0 | 7.7500 | 1 | 1 | 0 |
891 rows × 9 columns
# Precision/recall/F1 per class on the test split.
print(classification_report(y_test, predictions))
precision recall f1-score support
0 0.78 0.87 0.82 154
1 0.79 0.67 0.72 114
accuracy 0.78 268
macro avg 0.79 0.77 0.77 268
weighted avg 0.78 0.78 0.78 268
# Confusion matrix: rows = actual (0, 1), columns = predicted (0, 1).
confusion_matrix(y_test, predictions)
array([[134, 20],
[ 38, 76]])
# NOTE(review): this heatmaps the raw feature values row-by-row, which is not
# a meaningful plot — presumably df.corr() (correlations) or df.isnull() was
# intended here; confirm and fix.
sns.heatmap(df, cmap = 'Blues')
<matplotlib.axes._subplots.AxesSubplot at 0x7f9335e6c3d0>
# Display the dataset again (duplicate of the display above).
df
| Survived | Pclass | Age | SibSp | Parch | Fare | male | Q | S | |
|---|---|---|---|---|---|---|---|---|---|
| 0 | 0 | 3 | 22.0 | 1 | 0 | 7.2500 | 1 | 0 | 1 |
| 1 | 1 | 1 | 38.0 | 1 | 0 | 71.2833 | 0 | 0 | 0 |
| 2 | 1 | 3 | 26.0 | 0 | 0 | 7.9250 | 0 | 0 | 1 |
| 3 | 1 | 1 | 35.0 | 1 | 0 | 53.1000 | 0 | 0 | 1 |
| 4 | 0 | 3 | 35.0 | 0 | 0 | 8.0500 | 1 | 0 | 1 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 886 | 0 | 2 | 27.0 | 0 | 0 | 13.0000 | 1 | 0 | 1 |
| 887 | 1 | 1 | 19.0 | 0 | 0 | 30.0000 | 0 | 0 | 1 |
| 888 | 0 | 3 | 24.0 | 1 | 2 | 23.4500 | 0 | 0 | 1 |
| 889 | 1 | 1 | 26.0 | 0 | 0 | 30.0000 | 1 | 0 | 0 |
| 890 | 0 | 3 | 32.0 | 0 | 0 | 7.7500 | 1 | 1 | 0 |
891 rows × 9 columns
# Final preview of the prepared feature table.
df.head(2)
| Survived | Pclass | Age | SibSp | Parch | Fare | male | Q | S | |
|---|---|---|---|---|---|---|---|---|---|
| 0 | 0 | 3 | 22.0 | 1 | 0 | 7.2500 | 1 | 0 | 1 |
| 1 | 1 | 1 | 38.0 | 1 | 0 | 71.2833 | 0 | 0 | 0 |
Task¶
Convert the current notebook to an HTML file. The notebook will first be saved to /content/LogisticRegression_prac_01.ipynb and then converted to HTML. A file existence check will be performed to ensure the notebook is saved before the conversion.
Reasoning:
The subtask requires extracting the notebook content and saving it to a file. This code block imports the json module, defines the notebook path, retrieves the current notebook content as a Python dictionary using Javascript, converts it to a JSON string, and then writes the JSON string to the specified file path.
import json
from google.colab import output
import time

# Target path for the serialized notebook; the nbconvert step below reads it.
notebook_path = '/content/LogisticRegression_prac_01.ipynb'

def _fetch_notebook_json():
    """Return the current notebook as a JSON string, or '{}' on failure.

    Tries Colab's internal messaging API first ('get_ipynb' is the route
    Colab itself uses to serialize the open notebook). Falls back to probing
    frontend JS globals — those globals are typically not exposed, which is
    why the original eval_js-only version always came back empty.
    """
    try:
        from google.colab import _message
        resp = _message.blocking_request('get_ipynb', request='', timeout_sec=30)
        if resp and 'ipynb' in resp:
            return json.dumps(resp['ipynb'])
    except Exception:
        pass  # fall through to the JS probe
    js_code = '''
    (function() {
      if (typeof window.colab !== 'undefined' && typeof window.colab.global !== 'undefined' && typeof window.colab.global.notebook !== 'undefined') {
        return JSON.stringify(window.colab.global.notebook.toJSON());
      } else if (typeof window.colab !== 'undefined' && typeof window.colab.shell !== 'undefined') {
        return JSON.stringify(window.colab.shell.toJSON());
      }
      return '{}';
    })();
    '''
    try:
        return output.eval_js(js_code) or '{}'
    except Exception:
        return '{}'

def _is_valid_notebook(content):
    """True when *content* parses as JSON and contains a 'cells' key."""
    try:
        return 'cells' in json.loads(content)
    except (TypeError, ValueError):
        # The original called json.loads unguarded, so a non-JSON frontend
        # response would have raised instead of triggering a retry.
        return False

max_retries = 5
retry_delay_seconds = 2
notebook_content = '{}'
for attempt in range(max_retries):
    print(f"Attempt {attempt + 1}/{max_retries}: Retrieving notebook content...")
    candidate = _fetch_notebook_json()
    if _is_valid_notebook(candidate):
        notebook_content = candidate
        print("Notebook content successfully retrieved.")
        break
    print(f"Failed to retrieve complete notebook content. Retrying in {retry_delay_seconds} seconds...")
    time.sleep(retry_delay_seconds)

if not _is_valid_notebook(notebook_content):
    print("Error: Could not retrieve valid notebook content after multiple attempts.")
else:
    # Write the JSON string to the specified notebook path.
    with open(notebook_path, 'w') as f:
        f.write(notebook_content)
    print(f"Notebook content saved to {notebook_path}")
Attempt 1/5: Retrieving notebook content... Failed to retrieve complete notebook content. Retrying in 2 seconds... Attempt 2/5: Retrieving notebook content... Failed to retrieve complete notebook content. Retrying in 2 seconds... Attempt 3/5: Retrieving notebook content... Failed to retrieve complete notebook content. Retrying in 2 seconds... Attempt 4/5: Retrieving notebook content... Failed to retrieve complete notebook content. Retrying in 2 seconds... Attempt 5/5: Retrieving notebook content... Failed to retrieve complete notebook content. Retrying in 2 seconds... Error: Could not retrieve valid notebook content after multiple attempts.
Task¶
Convert the current notebook to an HTML file. The notebook will first be saved to /content/LogisticRegression_prac_01.ipynb and then converted to HTML. A file existence check will be performed to ensure the notebook is saved before the conversion.
Reasoning:
To perform the file existence check with a timeout, I need to import the os and time modules, define the notebook path and timeout, and then implement a polling loop. This code block will encapsulate all these requirements.
import os
import time

# Poll the filesystem until the saved notebook appears, so the HTML
# conversion below does not run against a missing file.
notebook_path = '/content/LogisticRegression_prac_01.ipynb'
timeout_seconds = 60  # Set an appropriate timeout, e.g., 60 seconds
deadline = time.time() + timeout_seconds
file_saved = False

while time.time() < deadline:
    if os.path.exists(notebook_path):
        file_saved = True
        print(f"Notebook found at {notebook_path}")
        break
    print(f"Notebook not yet found at {notebook_path}. Waiting...")
    time.sleep(1)  # Wait for 1 second before checking again

if not file_saved:
    print(f"Error: Notebook not found at {notebook_path} within {timeout_seconds} seconds.")
Notebook found at /content/LogisticRegression_prac_01.ipynb
Reasoning:
The subtask requires converting the notebook to an HTML file using the jupyter nbconvert shell command. This code block will execute that command.
# Convert the saved notebook to HTML.
# NOTE(review): the traceback below shows this fails with "missing a key:
# cells" — the file on disk is not a valid ipynb (the JS-based save above
# never captured real notebook content), so nbconvert cannot parse it.
!pip install nbconvert
%shell jupyter nbconvert --to html /content/LogisticRegression_prac_01.ipynb
Requirement already satisfied: nbconvert in /usr/local/lib/python3.12/dist-packages (7.17.0)
Requirement already satisfied: beautifulsoup4 in /usr/local/lib/python3.12/dist-packages (from nbconvert) (4.13.5)
Requirement already satisfied: bleach!=5.0.0 in /usr/local/lib/python3.12/dist-packages (from bleach[css]!=5.0.0->nbconvert) (6.3.0)
Requirement already satisfied: defusedxml in /usr/local/lib/python3.12/dist-packages (from nbconvert) (0.7.1)
Requirement already satisfied: jinja2>=3.0 in /usr/local/lib/python3.12/dist-packages (from nbconvert) (3.1.6)
Requirement already satisfied: jupyter-core>=4.7 in /usr/local/lib/python3.12/dist-packages (from nbconvert) (5.9.1)
Requirement already satisfied: jupyterlab-pygments in /usr/local/lib/python3.12/dist-packages (from nbconvert) (0.3.0)
Requirement already satisfied: markupsafe>=2.0 in /usr/local/lib/python3.12/dist-packages (from nbconvert) (3.0.3)
Requirement already satisfied: mistune<4,>=2.0.3 in /usr/local/lib/python3.12/dist-packages (from nbconvert) (3.2.0)
Requirement already satisfied: nbclient>=0.5.0 in /usr/local/lib/python3.12/dist-packages (from nbconvert) (0.10.4)
Requirement already satisfied: nbformat>=5.7 in /usr/local/lib/python3.12/dist-packages (from nbconvert) (5.10.4)
Requirement already satisfied: packaging in /usr/local/lib/python3.12/dist-packages (from nbconvert) (26.0)
Requirement already satisfied: pandocfilters>=1.4.1 in /usr/local/lib/python3.12/dist-packages (from nbconvert) (1.5.1)
Requirement already satisfied: pygments>=2.4.1 in /usr/local/lib/python3.12/dist-packages (from nbconvert) (2.19.2)
Requirement already satisfied: traitlets>=5.1 in /usr/local/lib/python3.12/dist-packages (from nbconvert) (5.7.1)
Requirement already satisfied: webencodings in /usr/local/lib/python3.12/dist-packages (from bleach!=5.0.0->bleach[css]!=5.0.0->nbconvert) (0.5.1)
Requirement already satisfied: tinycss2<1.5,>=1.1.0 in /usr/local/lib/python3.12/dist-packages (from bleach[css]!=5.0.0->nbconvert) (1.4.0)
Requirement already satisfied: platformdirs>=2.5 in /usr/local/lib/python3.12/dist-packages (from jupyter-core>=4.7->nbconvert) (4.9.4)
Requirement already satisfied: jupyter-client>=6.1.12 in /usr/local/lib/python3.12/dist-packages (from nbclient>=0.5.0->nbconvert) (7.4.9)
Requirement already satisfied: fastjsonschema>=2.15 in /usr/local/lib/python3.12/dist-packages (from nbformat>=5.7->nbconvert) (2.21.2)
Requirement already satisfied: jsonschema>=2.6 in /usr/local/lib/python3.12/dist-packages (from nbformat>=5.7->nbconvert) (4.26.0)
Requirement already satisfied: soupsieve>1.2 in /usr/local/lib/python3.12/dist-packages (from beautifulsoup4->nbconvert) (2.8.3)
Requirement already satisfied: typing-extensions>=4.0.0 in /usr/local/lib/python3.12/dist-packages (from beautifulsoup4->nbconvert) (4.15.0)
Requirement already satisfied: attrs>=22.2.0 in /usr/local/lib/python3.12/dist-packages (from jsonschema>=2.6->nbformat>=5.7->nbconvert) (25.4.0)
Requirement already satisfied: jsonschema-specifications>=2023.03.6 in /usr/local/lib/python3.12/dist-packages (from jsonschema>=2.6->nbformat>=5.7->nbconvert) (2025.9.1)
Requirement already satisfied: referencing>=0.28.4 in /usr/local/lib/python3.12/dist-packages (from jsonschema>=2.6->nbformat>=5.7->nbconvert) (0.37.0)
Requirement already satisfied: rpds-py>=0.25.0 in /usr/local/lib/python3.12/dist-packages (from jsonschema>=2.6->nbformat>=5.7->nbconvert) (0.30.0)
Requirement already satisfied: entrypoints in /usr/local/lib/python3.12/dist-packages (from jupyter-client>=6.1.12->nbclient>=0.5.0->nbconvert) (0.4)
Requirement already satisfied: nest-asyncio>=1.5.4 in /usr/local/lib/python3.12/dist-packages (from jupyter-client>=6.1.12->nbclient>=0.5.0->nbconvert) (1.6.0)
Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.12/dist-packages (from jupyter-client>=6.1.12->nbclient>=0.5.0->nbconvert) (2.9.0.post0)
Requirement already satisfied: pyzmq>=23.0 in /usr/local/lib/python3.12/dist-packages (from jupyter-client>=6.1.12->nbclient>=0.5.0->nbconvert) (26.2.1)
Requirement already satisfied: tornado>=6.2 in /usr/local/lib/python3.12/dist-packages (from jupyter-client>=6.1.12->nbclient>=0.5.0->nbconvert) (6.5.1)
Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.12/dist-packages (from python-dateutil>=2.8.2->jupyter-client>=6.1.12->nbclient>=0.5.0->nbconvert) (1.17.0)
[NbConvertApp] Converting notebook /content/LogisticRegression_prac_01.ipynb to html
Traceback (most recent call last):
File "/usr/local/bin/jupyter-nbconvert", line 10, in <module>
sys.exit(main())
^^^^^^
File "/usr/local/lib/python3.12/dist-packages/jupyter_core/application.py", line 284, in launch_instance
super().launch_instance(argv=argv, **kwargs)
File "/usr/local/lib/python3.12/dist-packages/traitlets/config/application.py", line 992, in launch_instance
app.start()
File "/usr/local/lib/python3.12/dist-packages/nbconvert/nbconvertapp.py", line 420, in start
self.convert_notebooks()
File "/usr/local/lib/python3.12/dist-packages/nbconvert/nbconvertapp.py", line 597, in convert_notebooks
self.convert_single_notebook(notebook_filename)
File "/usr/local/lib/python3.12/dist-packages/nbconvert/nbconvertapp.py", line 563, in convert_single_notebook
output, resources = self.export_single_notebook(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/nbconvert/nbconvertapp.py", line 487, in export_single_notebook
output, resources = self.exporter.from_filename(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/nbconvert/exporters/templateexporter.py", line 390, in from_filename
return super().from_filename(filename, resources, **kw) # type:ignore[return-value]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/nbconvert/exporters/exporter.py", line 201, in from_filename
return self.from_file(f, resources=resources, **kw)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/nbconvert/exporters/templateexporter.py", line 396, in from_file
return super().from_file(file_stream, resources, **kw) # type:ignore[return-value]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/nbconvert/exporters/exporter.py", line 221, in from_file
nbformat.read(file_stream, as_version=4), resources=resources, **kw
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/nbformat/__init__.py", line 174, in read
return reads(buf, as_version, capture_validation_error, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/nbformat/__init__.py", line 94, in reads
nb = convert(nb, as_version)
^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/nbformat/converter.py", line 63, in convert
raise ValidationError(msg) from None
jsonschema.exceptions.ValidationError: Notebook could not be converted from version 1 to version 2 because it's missing a key: cells
--------------------------------------------------------------------------- CalledProcessError Traceback (most recent call last) /tmp/ipykernel_2810/232916469.py in <cell line: 0>() 1 get_ipython().system('pip install nbconvert') ----> 2 get_ipython().run_line_magic('shell', 'jupyter nbconvert --to html /content/LogisticRegression_prac_01.ipynb') /usr/local/lib/python3.12/dist-packages/IPython/core/interactiveshell.py in run_line_magic(self, magic_name, line, _stack_depth) 2416 kwargs['local_ns'] = self.get_local_scope(stack_depth) 2417 with self.builtin_trap: -> 2418 result = fn(*args, **kwargs) 2419 return result 2420 /usr/local/lib/python3.12/dist-packages/google/colab/_system_commands.py in _shell_line_magic(line) 65 """ 66 result = _run_command(line, clear_streamed_output=False) ---> 67 result.check_returncode() 68 return result 69 /usr/local/lib/python3.12/dist-packages/google/colab/_system_commands.py in check_returncode(self) 135 def check_returncode(self): 136 if self.returncode: --> 137 raise subprocess.CalledProcessError( 138 returncode=self.returncode, cmd=self.args, output=self.output 139 ) CalledProcessError: Command 'jupyter nbconvert --to html /content/LogisticRegression_prac_01.ipynb' returned non-zero exit status 1.